library(rstan)
Loading required package: StanHeaders
Loading required package: ggplot2
rstan (Version 2.21.1, GitRev: 2e1f913d3ca3)
For execution on a local, multicore CPU with excess RAM we recommend calling
options(mc.cores = parallel::detectCores()).
To avoid recompilation of unchanged Stan programs, we recommend calling
rstan_options(auto_write = TRUE)
library(survival)
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
── Attaching packages ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── tidyverse 1.3.1 ──
✓ tibble  3.1.4     ✓ dplyr   1.0.7
✓ tidyr   1.1.3     ✓ stringr 1.4.0
✓ readr   2.0.1     ✓ forcats 0.5.1
✓ purrr   0.3.4     
── Conflicts ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────── tidyverse_conflicts() ──
x tidyr::extract() masks rstan::extract()
x dplyr::filter()  masks stats::filter()
x dplyr::lag()     masks stats::lag()
library(tidybayes)
library(scales)

Attaching package: ‘scales’

The following object is masked from ‘package:purrr’:

    discard

The following object is masked from ‘package:readr’:

    col_factor
library(survminer)
Loading required package: ggpubr
Registered S3 method overwritten by 'data.table':
  method           from
  print.data.table     
# data, parameters, model and generated quantities blocks
Stan_exponential_survival_model <- "
data{
  int <lower=1> N_uncensored;
  int <lower=1> N_censored;
  int <lower=0> numCovariates;
  matrix[N_censored, numCovariates] X_censored;
  matrix[N_uncensored, numCovariates] X_uncensored;
  vector <lower=0>[N_censored] times_censored;
  vector <lower=0>[N_uncensored] times_uncensored;
}

parameters{
  vector[numCovariates] beta; //regression coefficients
  real alpha; //intercept
}

model{
  beta ~ normal(0,10); //prior on regression coefficients
  alpha ~ normal(0,10); //prior on intercept
  target += exponential_lpdf(times_uncensored | exp(alpha+X_uncensored * beta)); //log-likelihood part for uncensored times
  target += exponential_lccdf(times_censored | exp(alpha+X_censored * beta)); //log-likelihood for censored times
}

generated quantities{
  vector[N_uncensored] times_uncensored_sampled; //prediction of death
  for(i in 1:N_uncensored) {
    times_uncensored_sampled[i] = exponential_rng(exp(alpha+X_uncensored[i,]* beta));
  }
}
"
# prepare the data
set.seed(42); 
require (tidyverse);
data <- read_csv('../data/necessary_fields.csv')
Rows: 2066 Columns: 7
── Column specification ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Delimiter: ","
chr (1): host_type
dbl (1): duration_months
lgl (5): major_releases, censored, high_rev_freq, multi_repo, high_author_count

ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
N <- nrow (data);
data$multi_repo <- car::recode(data$multi_repo, "'TRUE' = 0; 'FALSE' = 1")
data$censored <- car::recode(data$censored, "'TRUE' = 0; 'FALSE' = 1")
X <- as.matrix(pull(data, multi_repo)); 
is_censored <- pull(data, censored)==0; 
times <- pull(data, duration_months); 
msk_censored <- is_censored == 1; 
N_censored <- sum(msk_censored);
# put data into a list for Stan
Stan_data <- list (N_uncensored = N - N_censored, 
                    N_censored = N_censored,
                    numCovariates = ncol(X), 
                    X_censored = as.matrix(X[msk_censored,]),
                    X_uncensored = as.matrix(X[!msk_censored ,]), 
                    times_censored = times[msk_censored],
                    times_uncensored = times[!msk_censored])
# fit Stan model
require(rstan)
exp_surv_model_fit <- suppressMessages(stan(model_code = Stan_exponential_survival_model, data = Stan_data))
sh: Data/bayesian: No such file or directory
Warning in system2(CXX, args = ARGS) : error in running command
Warning in file.remove(c(unprocessed, processed)) :
  cannot remove file '/var/folders/q8/7tchbyvd1dj3hkgw5ffkk6ph0000gp/T//RtmpKsHNQc/file9c702b7c6dec.stan', reason 'No such file or directory'
sh: clang++ -mmacosx-version-min=10.13: command not found

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 1).
Chain 1: 
Chain 1: Gradient evaluation took 0.000481 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 4.81 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1: 
Chain 1: 
Chain 1: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 1: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 1: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 1: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 1: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 1: 
Chain 1:  Elapsed Time: 2.52927 seconds (Warm-up)
Chain 1:                2.50296 seconds (Sampling)
Chain 1:                5.03223 seconds (Total)
Chain 1: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 2).
Chain 2: 
Chain 2: Gradient evaluation took 0.000196 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 1.96 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2: 
Chain 2: 
Chain 2: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 2: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 2: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 2: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 2: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 2: 
Chain 2:  Elapsed Time: 2.9037 seconds (Warm-up)
Chain 2:                3.17301 seconds (Sampling)
Chain 2:                6.07671 seconds (Total)
Chain 2: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 3).
Chain 3: 
Chain 3: Gradient evaluation took 0.000203 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 2.03 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3: 
Chain 3: 
Chain 3: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 3: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 3: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 3: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 3: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 3: 
Chain 3:  Elapsed Time: 2.67719 seconds (Warm-up)
Chain 3:                2.95634 seconds (Sampling)
Chain 3:                5.63353 seconds (Total)
Chain 3: 

SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 4).
Chain 4: 
Chain 4: Gradient evaluation took 0.000195 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 1.95 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4: 
Chain 4: 
Chain 4: Iteration:    1 / 2000 [  0%]  (Warmup)
Chain 4: Iteration:  200 / 2000 [ 10%]  (Warmup)
Chain 4: Iteration:  400 / 2000 [ 20%]  (Warmup)
Chain 4: Iteration:  600 / 2000 [ 30%]  (Warmup)
Chain 4: Iteration:  800 / 2000 [ 40%]  (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%]  (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%]  (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%]  (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%]  (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%]  (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%]  (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%]  (Sampling)
Chain 4: 
Chain 4:  Elapsed Time: 3.13203 seconds (Warm-up)
Chain 4:                2.85686 seconds (Sampling)
Chain 4:                5.98889 seconds (Total)
Chain 4: 
# print model fit
print(get_seed(exp_surv_model_fit))
[1] 1781592037
# print fit summary
fit_summary <- summary(exp_surv_model_fit)
print(fit_summary$summary)
                                      mean     se_mean          sd          2.5%          25%          50%          75%        97.5%     n_eff      Rhat
beta[1]                           1.237474 0.007908949   0.2184724     0.8278381     1.092083     1.229948     1.374603     1.687030  763.0548 1.0006992
alpha                            -5.998177 0.007768561   0.2152980    -6.4454373    -6.138731    -5.990108    -5.854118    -5.587744  768.0666 1.0006953
times_uncensored_sampled[1]     114.563520 1.837459182 114.0407784     2.4568818    31.892139    79.240041   161.134308   417.622660 3851.9882 1.0007646
times_uncensored_sampled[2]     115.644401 1.824486836 114.8785914     3.0066199    34.650552    81.793933   159.445637   429.419951 3964.5759 1.0003087
times_uncensored_sampled[3]     116.819410 1.842932854 113.8019114     3.5796150    35.062306    83.334274   160.085429   438.103136 3813.1166 0.9993520
times_uncensored_sampled[4]     119.436497 1.884441829 120.0833701     3.0544884    33.863453    81.209952   164.857425   442.464711 4060.6940 1.0002426
times_uncensored_sampled[5]     117.891291 1.817152486 116.7332958     3.7771450    33.809990    80.814402   166.268726   436.095921 4126.7366 1.0006293
times_uncensored_sampled[6]     116.994273 1.898037634 118.9686408     2.7500939    33.048916    80.898408   161.612631   444.910007 3928.7587 1.0001449
times_uncensored_sampled[7]     117.642811 1.869465177 116.1300834     3.1787506    34.571120    82.758131   165.274436   422.580337 3858.8217 1.0004184
times_uncensored_sampled[8]     115.178674 1.772909730 114.7357260     3.1337112    32.450490    79.773488   161.266041   412.390758 4188.1679 1.0004307
times_uncensored_sampled[9]     116.330523 1.947957209 117.2093441     2.5507989    33.348408    80.765037   160.234529   428.850406 3620.4758 0.9995617
times_uncensored_sampled[10]    115.790518 1.807664746 115.0930266     2.8692665    33.800092    80.321039   161.776534   423.529276 4053.7993 0.9996944
times_uncensored_sampled[11]    117.596197 1.837352799 116.7778888     2.8208116    33.689348    82.780866   161.873819   430.216108 4039.5792 0.9997525
times_uncensored_sampled[12]    115.194094 1.804527647 115.2445760     2.7235515    32.542163    79.188164   161.477446   426.862284 4078.6262 1.0005661
times_uncensored_sampled[13]    116.732502 1.839843284 115.4392163     3.0360671    35.188911    82.101549   162.036819   418.718626 3936.8156 0.9997012
times_uncensored_sampled[14]    120.484399 1.932144909 122.1088624     2.8011592    32.853501    81.553801   171.038773   439.683274 3994.0636 0.9999205
times_uncensored_sampled[15]    117.403060 1.924548124 120.1508774     3.3964327    32.913646    78.768683   160.163404   445.568811 3897.5917 0.9995690
times_uncensored_sampled[16]    116.369742 1.802165029 113.4495087     3.5083646    34.033509    78.701440   165.202254   416.545608 3962.9275 1.0000141
times_uncensored_sampled[17]    116.525019 1.895350812 116.4297149     2.4801724    34.114123    81.348031   159.156231   424.726445 3773.5356 0.9998803
times_uncensored_sampled[18]    119.044333 1.980047633 118.0862989     2.8090583    34.250687    81.359318   166.654472   444.460092 3556.7042 1.0007019
times_uncensored_sampled[19]    117.400332 1.991495531 118.5693667     3.1160080    33.366716    79.043252   162.056659   439.376097 3544.7559 1.0008059
times_uncensored_sampled[20]    117.810152 1.872014546 119.2956470     2.6207819    33.996288    81.347571   164.458612   425.105602 4060.9791 0.9998185
times_uncensored_sampled[21]    118.554440 1.927248799 117.8609877     2.9556187    34.316002    82.482623   163.798112   435.011340 3739.9396 0.9997950
times_uncensored_sampled[22]    118.974024 1.874065446 119.4651380     2.6225398    34.036153    81.736563   165.920045   445.871203 4063.6180 0.9993442
times_uncensored_sampled[23]    112.874179 1.946434626 110.5213808     2.9174608    33.069418    78.921806   157.450987   420.929850 3224.1331 1.0004010
times_uncensored_sampled[24]    412.535952 7.538444523 437.3392704     8.8759851   111.343655   277.214774   560.050792  1534.868300 3365.6850 0.9998746
times_uncensored_sampled[25]    122.224853 1.865218120 121.4751825     2.8493874    35.925866    84.719405   174.068793   441.735914 4241.4648 0.9997263
times_uncensored_sampled[26]    120.101841 1.906880036 118.2023972     3.5108834    34.402372    85.264381   165.074265   436.611712 3842.4288 0.9992165
times_uncensored_sampled[27]    120.008485 1.988773583 121.9260060     3.0607592    34.211453    82.206197   162.064643   461.123365 3758.5645 1.0003699
times_uncensored_sampled[28]    115.479381 1.835497692 115.8867913     2.5151007    32.290426    79.871596   161.424153   417.874353 3986.2102 0.9997327
times_uncensored_sampled[29]    117.031797 1.888034853 118.5345351     2.9538814    34.661931    80.570718   161.682045   440.602873 3941.5749 0.9998649
times_uncensored_sampled[30]    115.314226 1.898743040 118.9851846     2.9359084    31.066262    76.351228   160.507405   432.812460 3926.9321 0.9997087
times_uncensored_sampled[31]    117.638162 2.004749081 122.1925394     3.1820807    32.814989    78.228938   161.525429   450.702140 3715.0900 0.9995991
times_uncensored_sampled[32]    117.138188 1.870751662 115.4297883     2.9815895    33.253829    82.334081   162.271021   432.722015 3807.1810 0.9997263
times_uncensored_sampled[33]    115.717401 1.869458231 113.9879092     3.0644653    33.419130    81.314320   163.825196   413.254806 3717.8002 1.0001079
times_uncensored_sampled[34]    120.008500 1.905909502 121.3622074     3.2532771    34.987746    84.580338   164.115599   429.851116 4054.7341 1.0000852
times_uncensored_sampled[35]    116.750810 1.837259559 116.3287138     3.1440953    33.006976    81.980091   162.169939   440.512405 4008.9701 1.0005699
times_uncensored_sampled[36]    117.490932 1.867983649 115.7511571     2.7508074    34.075965    81.559165   164.176301   424.451641 3839.7641 1.0010271
times_uncensored_sampled[37]    116.315164 1.879284234 118.8415098     3.1796313    32.286362    79.199806   162.545072   424.633358 3998.9998 0.9993207
times_uncensored_sampled[38]    116.307542 1.911654730 117.5921404     2.6258308    33.810689    79.021481   159.201416   434.790855 3783.8828 0.9998369
times_uncensored_sampled[39]    117.330894 2.038261433 115.8941385     2.8272365    33.305807    81.343545   165.269990   411.787547 3232.9811 1.0000191
times_uncensored_sampled[40]    113.440341 1.885845145 117.1806116     2.7528183    31.144236    77.288693   156.465217   444.661049 3860.9970 0.9997371
times_uncensored_sampled[41]    115.769893 1.729463859 111.4090935     3.3448325    32.687791    81.545298   164.756267   415.994483 4149.7153 0.9996285
times_uncensored_sampled[42]    117.813343 1.848612534 116.1864425     2.9039792    34.922921    82.028088   161.709660   426.265195 3950.2003 0.9999293
times_uncensored_sampled[43]    118.255772 1.947199549 118.1612646     3.7630707    33.311039    80.860933   164.336246   431.090969 3682.3862 1.0012290
times_uncensored_sampled[44]    117.292285 1.908341917 119.0940559     2.9862616    34.415958    81.277258   157.435925   439.437872 3894.6442 1.0006046
times_uncensored_sampled[45]    118.649344 1.898512651 119.2687297     3.1068938    34.151504    83.060229   162.862096   427.838978 3946.6280 0.9998051
times_uncensored_sampled[46]    115.365830 1.876617919 114.6354073     3.0379727    33.651592    79.644413   159.166936   413.317493 3731.5206 0.9995702
times_uncensored_sampled[47]    115.529657 1.889105491 114.9938524     2.5788932    34.245325    78.746230   161.103656   428.182942 3705.4147 0.9994047
times_uncensored_sampled[48]    114.389154 1.893868434 118.2190185     3.1155235    32.191944    78.690477   156.914497   433.289514 3896.5037 0.9993100
times_uncensored_sampled[49]    116.734025 1.862236654 112.9391220     2.7846205    33.515822    83.759398   162.486539   417.845663 3678.0622 0.9997950
times_uncensored_sampled[50]    116.494561 1.868064152 118.4261878     2.9034874    31.797247    78.981033   161.876063   443.160527 4018.9437 0.9997018
times_uncensored_sampled[51]    116.783831 1.877234552 116.8204766     2.5847164    33.962558    81.272447   162.028854   430.925734 3872.5842 0.9998843
times_uncensored_sampled[52]    116.434195 1.805444549 113.1539634     3.1457660    34.038188    80.448871   163.217093   412.727745 3927.9978 0.9996780
times_uncensored_sampled[53]    123.832003 1.943816408 122.8065188     3.1345705    35.446753    85.059660   172.885103   459.750971 3991.4653 0.9997243
times_uncensored_sampled[54]    116.219361 1.829368154 113.7842417     2.9034064    34.720874    79.349423   163.913418   422.544673 3868.6729 0.9992595
times_uncensored_sampled[55]    115.001415 1.791344146 113.6660926     2.9178405    34.365971    81.236770   158.089230   414.184198 4026.2784 0.9999097
times_uncensored_sampled[56]    115.112713 1.892069439 114.6985686     3.0249014    32.716065    78.473881   159.588126   436.220531 3674.8689 1.0001461
times_uncensored_sampled[57]    115.016108 1.865289093 115.2950040     2.7582524    31.622085    80.395632   160.785592   419.949710 3820.5745 1.0007811
times_uncensored_sampled[58]    115.792372 1.862052684 115.4349216     3.3789523    34.063507    80.477616   158.535325   431.965162 3843.1780 0.9999365
times_uncensored_sampled[59]    116.665262 1.913401316 119.2563448     2.8560893    32.996410    80.561082   160.374985   445.219239 3884.6407 0.9996020
times_uncensored_sampled[60]    115.887370 1.798642670 116.0210876     3.2422545    33.354128    80.279383   156.772636   420.035207 4160.8694 1.0005379
times_uncensored_sampled[61]    119.368524 1.927472043 119.5896981     3.2185836    34.809389    82.310272   167.905618   436.969271 3849.5624 1.0003193
times_uncensored_sampled[62]    118.030807 1.894313127 116.7588967     2.5281294    33.538741    82.327326   165.377599   414.794693 3799.0624 1.0007484
times_uncensored_sampled[63]    118.750642 1.876850014 118.8846170     3.1421360    33.881112    82.421983   161.918312   427.132127 4012.2888 0.9998191
times_uncensored_sampled[64]    112.714862 1.850772502 119.5454497     2.3096143    30.208798    74.551735   154.363131   448.141258 4172.1511 0.9994665
times_uncensored_sampled[65]    114.833955 1.865581917 116.8287301     3.2116885    33.135417    77.448960   161.083267   424.595457 3921.6666 1.0004827
times_uncensored_sampled[66]    120.709803 1.863691966 120.3623805     3.1975269    34.606598    84.126169   166.847803   442.063917 4170.9335 1.0001504
times_uncensored_sampled[67]    117.608570 1.850573651 118.0277614     2.6612538    32.350371    82.800411   164.455008   431.418447 4067.7625 0.9997873
times_uncensored_sampled[68]    116.439320 1.807300836 117.8952067     2.7461112    32.597916    79.356567   162.856349   432.226396 4255.3119 0.9996318
times_uncensored_sampled[69]    117.117251 1.917045288 119.0928814     2.6431777    33.015902    79.121966   162.665533   427.898552 3859.2851 1.0000418
times_uncensored_sampled[70]    119.574233 1.918985482 118.1748250     2.9884185    34.976773    82.913042   168.390967   429.680146 3792.3338 0.9991319
times_uncensored_sampled[71]    116.605720 1.906257779 119.2862296     2.7526822    33.302700    76.729040   161.865513   437.952770 3915.7717 1.0000881
times_uncensored_sampled[72]    115.509036 1.955351817 123.1091810     2.4017121    32.660218    78.658996   156.277202   442.198192 3963.9765 1.0000233
times_uncensored_sampled[73]    113.145579 1.863083493 114.8336712     3.1178683    31.127114    79.076104   157.029957   406.780334 3799.0400 1.0008255
times_uncensored_sampled[74]    116.571755 1.865331676 116.0904974     2.5603608    34.657257    78.990652   163.432567   433.393119 3873.3007 1.0001140
times_uncensored_sampled[75]    117.312164 1.877487842 114.8107333     2.6538158    33.547124    82.544603   165.028549   419.693848 3739.4757 1.0003857
times_uncensored_sampled[76]    115.911359 1.869481071 115.9818912     3.3614961    34.878681    79.253067   158.301583   425.638227 3848.9142 1.0011785
times_uncensored_sampled[77]    120.009590 1.895298767 117.5454427     2.8654953    34.633717    85.100487   168.210187   439.068696 3846.4158 1.0008081
times_uncensored_sampled[78]    118.500651 1.874885470 117.9023659     3.2866989    34.819864    80.924121   163.945429   440.115184 3954.5362 1.0009745
times_uncensored_sampled[79]    119.614612 2.020632684 122.6015678     2.8559348    34.526590    84.059916   162.369025   445.900877 3681.4364 1.0001408
times_uncensored_sampled[80]    113.905370 1.844540633 115.6216388     3.1838905    33.543140    78.666561   159.872830   415.453297 3929.1788 1.0000333
times_uncensored_sampled[81]    117.753559 1.853328060 117.0974099     2.6908536    34.871810    79.696405   162.235825   443.008500 3991.9949 1.0000064
times_uncensored_sampled[82]    115.703053 2.013782885 115.7796032     2.6256264    32.994229    80.446445   163.457570   425.574418 3305.5126 1.0004408
times_uncensored_sampled[83]    119.260615 1.852948058 120.3237606     2.7645064    34.279221    81.230812   165.135477   449.495711 4216.7349 1.0010982
times_uncensored_sampled[84]    116.102750 1.803915927 114.9241674     3.5769031    34.250719    80.938078   162.146585   419.588211 4058.7299 1.0000843
times_uncensored_sampled[85]    117.128847 1.870386431 116.5782726     3.4428239    32.725839    80.535864   163.054385   438.232988 3884.8347 0.9996910
times_uncensored_sampled[86]    116.658181 1.866241569 117.3398034     2.9689830    32.707535    79.822101   162.119750   436.114860 3953.2565 1.0000681
times_uncensored_sampled[87]    115.771211 1.821585559 115.8509865     3.2812594    32.709645    79.642455   160.142693   428.324817 4044.8305 0.9996226
times_uncensored_sampled[88]    116.983857 1.854787396 116.9207721     3.3877242    34.109489    80.678860   164.119449   435.477492 3973.7000 1.0001547
times_uncensored_sampled[89]    118.521314 1.886370168 119.4089176     3.1499202    34.086960    82.342311   162.202195   440.885839 4007.0032 0.9997752
times_uncensored_sampled[90]    117.759313 1.851909929 116.1639029     2.9147852    34.861812    83.094948   165.388242   426.890184 3934.6189 0.9996183
times_uncensored_sampled[91]    114.943629 1.820470956 114.3465664     3.0005340    33.639162    81.027187   159.718043   425.806096 3945.2883 0.9990669
times_uncensored_sampled[92]    117.865918 1.934206852 118.6019196     2.2970831    32.863038    82.113594   164.478941   435.248376 3759.9113 1.0000991
times_uncensored_sampled[93]    115.718864 1.885305543 119.1521793     2.4855951    31.825874    78.107677   155.257531   436.486745 3994.2983 0.9998687
times_uncensored_sampled[94]    113.741959 1.708122522 109.6807791     2.3645031    34.269941    79.934860   160.234520   410.682726 4123.0919 1.0009693
times_uncensored_sampled[95]    117.662357 1.879326857 119.3114186     3.1481477    33.887090    78.611047   163.444833   445.947579 4030.5042 0.9995995
times_uncensored_sampled[96]    117.101244 1.852471606 115.9192904     2.9521098    34.046265    81.762770   162.968288   418.686646 3915.6901 1.0003979
times_uncensored_sampled[97]    116.068221 1.814546619 115.7858077     3.2460162    32.352943    82.236391   160.723125   420.855401 4071.6871 0.9994627
times_uncensored_sampled[98]    112.737374 1.816119395 114.3306297     2.7398460    31.941124    76.296925   156.120253   410.530970 3963.1125 0.9995505
 [ reached getOption("max.print") -- omitted 678 rows ]
exp_surv_model_draws <- tidybayes::tidy_draws(exp_surv_model_fit)
exp_surv_model_draws
## Constructor for Strata-specific survival function
construct_survival_function <- function(alpha, beta, x) {
    function(t) {
        lambda <- exp(alpha + x*beta)
        exp(-(lambda * t))
    }
}

## Random functions
exp_surv_model_surv_func <-
    exp_surv_model_draws %>%
    select(.chain, .iteration, .draw, alpha, `beta[1]`) %>%
    ## Simplify name
    rename(beta = `beta[1]`) %>%
    ## Construct realization of random functions
    mutate(`S(t|1)` = pmap(list(alpha, beta), function(a,b) {construct_survival_function(a,b,1)}),
           `S(t|0)` = pmap(list(alpha, beta), function(a,b) {construct_survival_function(a,b,0)}))
exp_surv_model_surv_func
times <- seq(from = 0, to = 165, by = 0.1)
times_df <- data_frame(t = times)
Warning: `data_frame()` was deprecated in tibble 1.1.0.
Please use `tibble()` instead.
This warning is displayed once every 8 hours.
Call `lifecycle::last_lifecycle_warnings()` to see where this warning was generated.
## Try first realizations
exp_surv_model_surv_func$`S(t|1)`[[1]](times[1:10])
 [1] 1.0000000 0.9991831 0.9983668 0.9975512 0.9967362 0.9959220 0.9951084 0.9942954 0.9934831 0.9926715
exp_surv_model_surv_func$`S(t|0)`[[1]](times[1:10])
 [1] 1.0000000 0.9997208 0.9994417 0.9991626 0.9988837 0.9986048 0.9983260 0.9980472 0.9977686 0.9974900
## Apply all realizations
exp_surv_model_survival <-
    exp_surv_model_surv_func %>%
    mutate(times_df = list(times_df)) %>%
    mutate(times_df = pmap(list(times_df, `S(t|1)`, `S(t|0)`),
                           function(df, s1, s0) {df %>% mutate(s1 = s1(t),
                                                               s0 = s0(t))})) %>%
    select(-`S(t|1)`, -`S(t|0)`) %>%
    unnest(cols = c(times_df)) %>%
    gather(key = Strata, value = survival, s1, s0) %>%
    mutate(Strata = factor(Strata, # Strata is whether or not something is hosted on multiple repos or not
                              levels = c("s1","s0"),
                              labels = c("multi repositories=No","multi repositories=Yes")))

## Average on survival scale
exp_surv_model_survival_mean <-
    exp_surv_model_survival %>%
    group_by(Strata, t) %>%
    summarize(survival_mean = mean(survival),
              survival_95upper = quantile(survival, probs = 0.975),
              survival_95lower = quantile(survival, probs = 0.025))
`summarise()` has grouped output by 'Strata'. You can override using the `.groups` argument.
exp_surv_model_survival
# plot the graphs
(ggplot(data = exp_surv_model_survival, mapping = aes(x = t, y = survival, color = Strata, group = interaction(.chain,.draw,Strata))) 
  + theme_survminer()
 + geom_line(size = 0.1, alpha = 0.02) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_mean, group = Strata)) 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95upper, group = Strata), linetype = "dotted") 
 + geom_line(data = exp_surv_model_survival_mean, mapping = aes(y = survival_95lower, group = Strata), linetype = "dotted")
 + scale_y_continuous(limits=c(0,1))
 + scale_x_continuous(breaks = c(0,40,80,120,160))
 +labs(x = "Time (Months)", y = "Survival probability"))

LS0tCnRpdGxlOiAiQmF5ZXNpYW4gc3Vydml2YWwgYW5hbHlzaXMgdXNpbmcgbXVsdGlwbGUgcmVwb3NpdG9yaWVzIGFzIGEgcHJlZGljdG9yIgpvdXRwdXQ6IGh0bWxfbm90ZWJvb2sKLS0tCmBgYHtyfQpsaWJyYXJ5KHJzdGFuKQpsaWJyYXJ5KHN1cnZpdmFsKQpsaWJyYXJ5KHRpZHl2ZXJzZSkKbGlicmFyeSh0aWR5YmF5ZXMpCmxpYnJhcnkoc2NhbGVzKQpsaWJyYXJ5KHN1cnZtaW5lcikKYGBgCiAKCmBgYHtyfQojIGRhdGEsIHBhcmFtZXRlcnMsIG1vZGVsIGFuZCBnZW5lcmF0ZWQgcXVhbnRpdGllcyBibG9ja3MKU3Rhbl9leHBvbmVudGlhbF9zdXJ2aXZhbF9tb2RlbCA8LSAiCmRhdGF7CiAgaW50IDxsb3dlcj0xPiBOX3VuY2Vuc29yZWQ7CiAgaW50IDxsb3dlcj0xPiBOX2NlbnNvcmVkOwogIGludCA8bG93ZXI9MD4gbnVtQ292YXJpYXRlczsKICBtYXRyaXhbTl9jZW5zb3JlZCwgbnVtQ292YXJpYXRlc10gWF9jZW5zb3JlZDsKICBtYXRyaXhbTl91bmNlbnNvcmVkLCBudW1Db3ZhcmlhdGVzXSBYX3VuY2Vuc29yZWQ7CiAgdmVjdG9yIDxsb3dlcj0wPltOX2NlbnNvcmVkXSB0aW1lc19jZW5zb3JlZDsKICB2ZWN0b3IgPGxvd2VyPTA+W05fdW5jZW5zb3JlZF0gdGltZXNfdW5jZW5zb3JlZDsKfQoKcGFyYW1ldGVyc3sKICB2ZWN0b3JbbnVtQ292YXJpYXRlc10gYmV0YTsgLy9yZWdyZXNzaW9uIGNvZWZmaWNpZW50cwogIHJlYWwgYWxwaGE7IC8vaW50ZXJjZXB0Cn0KCm1vZGVsewogIGJldGEgfiBub3JtYWwoMCwxMCk7IC8vcHJpb3Igb24gcmVncmVzc2lvbiBjb2VmZmljaWVudHMKICBhbHBoYSB+IG5vcm1hbCgwLDEwKTsgLy9wcmlvciBvbiBpbnRlcmNlcHQKICB0YXJnZXQgKz0gZXhwb25lbnRpYWxfbHBkZih0aW1lc191bmNlbnNvcmVkIHwgZXhwKGFscGhhK1hfdW5jZW5zb3JlZCAqIGJldGEpKTsgLy9sb2ctbGlrZWxpaG9vZCBwYXJ0IGZvciB1bmNlbnNvcmVkIHRpbWVzCiAgdGFyZ2V0ICs9IGV4cG9uZW50aWFsX2xjY2RmKHRpbWVzX2NlbnNvcmVkIHwgZXhwKGFscGhhK1hfY2Vuc29yZWQgKiBiZXRhKSk7IC8vbG9nLWxpa2VsaWhvb2QgZm9yIGNlbnNvcmVkIHRpbWVzCn0KCmdlbmVyYXRlZCBxdWFudGl0aWVzewogIHZlY3RvcltOX3VuY2Vuc29yZWRdIHRpbWVzX3VuY2Vuc29yZWRfc2FtcGxlZDsgLy9wcmVkaWN0aW9uIG9mIGRlYXRoCiAgZm9yKGkgaW4gMTpOX3VuY2Vuc29yZWQpIHsKICAgIHRpbWVzX3VuY2Vuc29yZWRfc2FtcGxlZFtpXSA9IGV4cG9uZW50aWFsX3JuZyhleHAoYWxwaGErWF91bmNlbnNvcmVkW2ksXSogYmV0YSkpOwogIH0KfQoiCmBgYAoKYGBge3J9CiMgcHJlcGFyZSB0aGUgZGF0YQpzZXQuc2VlZCg0Mik7IApyZXF1aXJlICh0aWR5dmVyc2UpOwpkYXRhIDwtIHJlYWRfY3N2KCcuLi9kYXRhL25lY2Vzc2FyeV9maWVsZHMuY3N2JykKTiA8LSBucm93IChkYXRhKTsKZGF0YSRtdWx0aV9yZXBvIDwtIGNhcjo6cmVjb2RlKGRhdGEkbXVsdGlfcmVwbywgIidUUlVFJyA9IDA7ICdGQUxTRScgPSAxIikKZGF0YSRjZW5zb3JlZCA8LSBjYXI6OnJlY29kZShkYXRhJGNlbnNvcmVkLCAiJ1RSVUUnID0gMDsgJ0ZBTFNFJyA9IDEiKQpYIDwtIGFzLm1hdHJpeChwdWxsKGRhdGEsIG11bHRpX3JlcG8pKTsgCmlzX2NlbnNvcmVkIDwtIHB1bGwoZGF0YSwgY2Vuc29yZWQpPT0wOyAKdGltZXMgPC0gcHVsbChkYXRhLCBkdXJhdGlvbl9tb250aHMpOyAKbXNrX2NlbnNvcmVkIDwtIGlzX2NlbnNvcmVkID09IDE7IApOX2NlbnNvcmVkIDwtIHN1bShtc2tfY2Vuc29yZWQpOwpgYGAKCmBgYHtyfQojIHB1dCBkYXRhIGludG8gYSBsaXN0IGZvciBTdGFuClN0YW5fZGF0YSA8LSBsaXN0IChOX3VuY2Vuc29yZWQgPSBOIC0gTl9jZW5zb3JlZCwgCiAgICAgICAgICAgICAgICAgICAgTl9jZW5zb3JlZCA9IE5fY2Vuc29yZWQsCiAgICAgICAgICAgICAgICAgICAgbnVtQ292YXJpYXRlcyA9IG5jb2woWCksIAogICAgICAgICAgICAgICAgICAgIFhfY2Vuc29yZWQgPSBhcy5tYXRyaXgoWFttc2tfY2Vuc29yZWQsXSksCiAgICAgICAgICAgICAgICAgICAgWF91bmNlbnNvcmVkID0gYXMubWF0cml4KFhbIW1za19jZW5zb3JlZCAsXSksIAogICAgICAgICAgICAgICAgICAgIHRpbWVzX2NlbnNvcmVkID0gdGltZXNbbXNrX2NlbnNvcmVkXSwKICAgICAgICAgICAgICAgICAgICB0aW1lc191bmNlbnNvcmVkID0gdGltZXNbIW1za19jZW5zb3JlZF0pCmBgYAoKYGBge3J9CiMgZml0IFN0YW4gbW9kZWwKcmVxdWlyZShyc3RhbikKZXhwX3N1cnZfbW9kZWxfZml0IDwtIHN1cHByZXNzTWVzc2FnZXMoc3Rhbihtb2RlbF9jb2RlID0gU3Rhbl9leHBvbmVudGlhbF9zdXJ2aXZhbF9tb2RlbCwgZGF0YSA9IFN0YW5fZGF0YSkpCmBgYAoKYGBge3J9CiMgcHJpbnQgbW9kZWwgZml0CnByaW50KGdldF9zZWVkKGV4cF9zdXJ2X21vZGVsX2ZpdCkpCmBgYAoKYGBge3J9CiMgcHJpbnQgZml0IHN1bW1hcnkKZml0X3N1bW1hcnkgPC0gc3VtbWFyeShleHBfc3Vydl9tb2RlbF9maXQpCnByaW50KGZpdF9zdW1tYXJ5JHN1bW1hcnkpCmBgYAoKYGBge3J9CmV4cF9zdXJ2X21vZGVsX2RyYXdzIDwtIHRpZHliYXllczo6dGlkeV9kcmF3cyhleHBfc3Vydl9tb2RlbF9maXQpCmV4cF9zdXJ2X21vZGVsX2RyYXdzCmBgYAogCmBgYHtyfQojIyBDb25zdHJ1Y3RvciBmb3IgU3RyYXRhLXNwZWNpZmljIHN1cnZpdmFsIGZ1bmN0aW9uCmNvbnN0cnVjdF9zdXJ2aXZhbF9mdW5jdGlvbiA8LSBmdW5jdGlvbihhbHBoYSwgYmV0YSwgeCkgewogICAgZnVuY3Rpb24odCkgewogICAgICAgIGxhbWJkYSA8LSBleHAoYWxwaGEgKyB4KmJldGEpCiAgICAgICAgZXhwKC0obGFtYmRhICogdCkpCiAgICB9Cn0KCiMjIFJhbmRvbSBmdW5jdGlvbnMKZXhwX3N1cnZfbW9kZWxfc3Vydl9mdW5jIDwtCiAgICBleHBfc3Vydl9tb2RlbF9kcmF3cyAlPiUKICAgIHNlbGVjdCguY2hhaW4sIC5pdGVyYXRpb24sIC5kcmF3LCBhbHBoYSwgYGJldGFbMV1gKSAlPiUKICAgICMjIFNpbXBsaWZ5IG5hbWUKICAgIHJlbmFtZShiZXRhID0gYGJldGFbMV1gKSAlPiUKICAgICMjIENvbnN0cnVjdCByZWFsaXphdGlvbiBvZiByYW5kb20gZnVuY3Rpb25zCiAgICBtdXRhdGUoYFModHwxKWAgPSBwbWFwKGxpc3QoYWxwaGEsIGJldGEpLCBmdW5jdGlvbihhLGIpIHtjb25zdHJ1Y3Rfc3Vydml2YWxfZnVuY3Rpb24oYSxiLDEpfSksCiAgICAgICAgICAgYFModHwwKWAgPSBwbWFwKGxpc3QoYWxwaGEsIGJldGEpLCBmdW5jdGlvbihhLGIpIHtjb25zdHJ1Y3Rfc3Vydml2YWxfZnVuY3Rpb24oYSxiLDApfSkpCmV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYwpgYGAKCmBgYHtyfQp0aW1lcyA8LSBzZXEoZnJvbSA9IDAsIHRvID0gMTY1LCBieSA9IDAuMSkKdGltZXNfZGYgPC0gZGF0YV9mcmFtZSh0ID0gdGltZXMpCgojIyBUcnkgZmlyc3QgcmVhbGl6YXRpb25zCmV4cF9zdXJ2X21vZGVsX3N1cnZfZnVuYyRgUyh0fDEpYFtbMV1dKHRpbWVzWzE6MTBdKQpgYGAKCmBgYHtyfQpleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMkYFModHwwKWBbWzFdXSh0aW1lc1sxOjEwXSkKYGBgCmBgYHtyfQojIyBBcHBseSBhbGwgcmVhbGl6YXRpb25zCmV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsIDwtCiAgICBleHBfc3Vydl9tb2RlbF9zdXJ2X2Z1bmMgJT4lCiAgICBtdXRhdGUodGltZXNfZGYgPSBsaXN0KHRpbWVzX2RmKSkgJT4lCiAgICBtdXRhdGUodGltZXNfZGYgPSBwbWFwKGxpc3QodGltZXNfZGYsIGBTKHR8MSlgLCBgUyh0fDApYCksCiAgICAgICAgICAgICAgICAgICAgICAgICAgIGZ1bmN0aW9uKGRmLCBzMSwgczApIHtkZiAlPiUgbXV0YXRlKHMxID0gczEodCksCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIHMwID0gczAodCkpfSkpICU+JQogICAgc2VsZWN0KC1gUyh0fDEpYCwgLWBTKHR8MClgKSAlPiUKICAgIHVubmVzdChjb2xzID0gYyh0aW1lc19kZikpICU+JQogICAgZ2F0aGVyKGtleSA9IFN0cmF0YSwgdmFsdWUgPSBzdXJ2aXZhbCwgczEsIHMwKSAlPiUKICAgIG11dGF0ZShTdHJhdGEgPSBmYWN0b3IoU3RyYXRhLCAjIFN0cmF0YSBpcyB3aGV0aGVyIG9yIG5vdCBzb21ldGhpbmcgaXMgaG9zdGVkIG9uIG11bHRpcGxlIHJlcG9zIG9yIG5vdAogICAgICAgICAgICAgICAgICAgICAgICAgICAgICBsZXZlbHMgPSBjKCJzMSIsInMwIiksCiAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIGxhYmVscyA9IGMoIm11bHRpIHJlcG9zaXRvcmllcz1ObyIsIm11bHRpIHJlcG9zaXRvcmllcz1ZZXMiKSkpCgojIyBBdmVyYWdlIG9uIHN1cnZpdmFsIHNjYWxlCmV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsX21lYW4gPC0KICAgIGV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsICU+JQogICAgZ3JvdXBfYnkoU3RyYXRhLCB0KSAlPiUKICAgIHN1bW1hcml6ZShzdXJ2aXZhbF9tZWFuID0gbWVhbihzdXJ2aXZhbCksCiAgICAgICAgICAgICAgc3Vydml2YWxfOTV1cHBlciA9IHF1YW50aWxlKHN1cnZpdmFsLCBwcm9icyA9IDAuOTc1KSwKICAgICAgICAgICAgICBzdXJ2aXZhbF85NWxvd2VyID0gcXVhbnRpbGUoc3Vydml2YWwsIHByb2JzID0gMC4wMjUpKQoKZXhwX3N1cnZfbW9kZWxfc3Vydml2YWwKYGBgCgpgYGB7cn0KIyBwbG90IHRoZSBncmFwaHMKKGdncGxvdChkYXRhID0gZXhwX3N1cnZfbW9kZWxfc3Vydml2YWwsIG1hcHBpbmcgPSBhZXMoeCA9IHQsIHkgPSBzdXJ2aXZhbCwgY29sb3IgPSBTdHJhdGEsIGdyb3VwID0gaW50ZXJhY3Rpb24oLmNoYWluLC5kcmF3LFN0cmF0YSkpKSAKICArIHRoZW1lX3N1cnZtaW5lcigpCiArIGdlb21fbGluZShzaXplID0gMC4xLCBhbHBoYSA9IDAuMDIpIAogKyBnZW9tX2xpbmUoZGF0YSA9IGV4cF9zdXJ2X21vZGVsX3N1cnZpdmFsX21lYW4sIG1hcHBpbmcgPSBhZXMoeSA9IHN1cnZpdmFsX21lYW4sIGdyb3VwID0gU3RyYXRhKSkgCiArIGdlb21fbGluZShkYXRhID0gZXhwX3N1cnZfbW9kZWxfc3Vydml2YWxfbWVhbiwgbWFwcGluZyA9IGFlcyh5ID0gc3Vydml2YWxfOTV1cHBlciwgZ3JvdXAgPSBTdHJhdGEpLCBsaW5ldHlwZSA9ICJkb3R0ZWQiKSAKICsgZ2VvbV9saW5lKGRhdGEgPSBleHBfc3Vydl9tb2RlbF9zdXJ2aXZhbF9tZWFuLCBtYXBwaW5nID0gYWVzKHkgPSBzdXJ2aXZhbF85NWxvd2VyLCBncm91cCA9IFN0cmF0YSksIGxpbmV0eXBlID0gImRvdHRlZCIpCiArIHNjYWxlX3lfY29udGludW91cyhsaW1pdHM9YygwLDEpKQogKyBzY2FsZV94X2NvbnRpbnVvdXMoYnJlYWtzID0gYygwLDQwLDgwLDEyMCwxNjApKQogK2xhYnMoeCA9ICJUaW1lIChNb250aHMpIiwgeSA9ICJTdXJ2aXZhbCBwcm9iYWJpbGl0eSIpKQpgYGAKCg==